950ad8701f220155fda057c0b9ee17d03610f614,src/de/jungblut/classification/regression/LogisticRegressionCostFunction.java,LogisticRegressionCostFunction,evaluateCost,#DoubleVector#,53

Before Change



    // regularization term: lambda * I * theta (theta == input)
    DoubleVector regGradient = eye.multiply(lambda).multiplyVectorRow(input);
    // gradient = (X^T * (hypothesis - y) + lambda * theta) / m
    DoubleVector gradient = x.transpose()
        .multiplyVectorRow(hypo.subtract(y).getRowVector(0)).add(regGradient)
        .divide(m);

    return new CostGradientTuple(j, gradient);
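
For reference, the gradient above matches the standard regularized logistic-regression formulation, with `input` playing the role of the parameter vector theta and `hypo` the sigmoid hypothesis (the cost `j` is computed earlier in the method and is not shown in this excerpt). A sketch of the corresponding formulas, assuming the usual lambda/(2m) penalty in the cost:

J(\theta) = \frac{1}{m} \sum_{i=1}^{m} \Big[ -y^{(i)} \log h_\theta(x^{(i)}) - \big(1 - y^{(i)}\big) \log\big(1 - h_\theta(x^{(i)})\big) \Big] + \frac{\lambda}{2m} \sum_{j=1}^{n} \theta_j^2

\nabla_\theta J = \frac{1}{m} \Big( X^{\top} \big( h_\theta(X) - y \big) + \lambda \theta \Big)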

After Change


  public CostGradientTuple evaluateCost(DoubleVector input) {

    // hypothesis h = sigmoid(X * theta), wrapped as a single-row matrix
    DoubleVector activation = SIGMOID.get().apply(x.multiplyVectorRow(input));
    DenseDoubleMatrix hypo = new DenseDoubleMatrix(Arrays.asList(activation));
    // averaged cross-entropy error gives the cost j
    double error = ERROR_FUNCTION.calculateError(y, hypo);
    DoubleMatrix loss = hypo.subtract(y);
    double j = error / m;
    // unregularized gradient: X^T * (h - y) / m, using the cached transpose
    DoubleVector gradient = xTransposed.multiplyVectorRow(loss.getRowVector(0))
        .divide(m);
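
To make the new, unregularized computation concrete, here is a minimal self-contained sketch using plain arrays. The class name LogisticCostSketch and its helpers are illustrative assumptions and do not use the de.jungblut matrix API; the sketch only mirrors the same steps as the method above: sigmoid activation, averaged cross-entropy error, and the gradient X^T * (h - y) / m.

import java.util.Arrays;

public class LogisticCostSketch {

  // element-wise logistic function
  static double sigmoid(double z) {
    return 1.0 / (1.0 + Math.exp(-z));
  }

  /**
   * Computes the cross-entropy cost and gradient for parameters theta,
   * feature matrix x (m rows, n columns) and 0/1 labels y.
   * Returns {cost, gradient[0..n-1]} flattened into one array.
   */
  static double[] evaluateCost(double[][] x, double[] y, double[] theta) {
    int m = x.length;
    int n = theta.length;
    double[] gradient = new double[n];
    double error = 0.0;

    for (int i = 0; i < m; i++) {
      // activation h = sigmoid(x_i . theta)
      double z = 0.0;
      for (int j = 0; j < n; j++) {
        z += x[i][j] * theta[j];
      }
      double h = sigmoid(z);

      // accumulate cross-entropy error and X^T * (h - y)
      error += -y[i] * Math.log(h) - (1.0 - y[i]) * Math.log(1.0 - h);
      double loss = h - y[i];
      for (int j = 0; j < n; j++) {
        gradient[j] += x[i][j] * loss;
      }
    }

    // average both the cost and the gradient over the m samples
    double cost = error / m;
    for (int j = 0; j < n; j++) {
      gradient[j] /= m;
    }

    double[] result = new double[n + 1];
    result[0] = cost;
    System.arraycopy(gradient, 0, result, 1, n);
    return result;
  }

  public static void main(String[] args) {
    // tiny example: bias column plus one feature
    double[][] x = { { 1, 2 }, { 1, 3 }, { 1, -1 } };
    double[] y = { 1, 1, 0 };
    double[] theta = { 0.1, 0.2 };
    System.out.println(Arrays.toString(evaluateCost(x, y, theta)));
  }
}

Running the main method prints the cost followed by the gradient components for the toy data set, which is enough to check the averaging over m against the library version.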